In [1]:
import glob
import math
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import random
import sklearn.metrics as metrics

from tensorflow.keras import optimizers
from tensorflow.keras.callbacks import ModelCheckpoint, CSVLogger, LearningRateScheduler
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import add, concatenate, Conv2D, Dense, Dropout, Flatten, Input
from tensorflow.keras.layers import Activation, AveragePooling2D, BatchNormalization, MaxPooling2D
from tensorflow.keras.regularizers import l2
from tensorflow.keras.utils import to_categorical


%matplotlib inline
In [2]:
                            # Set up 'ggplot' style
plt.style.use('ggplot')     # if want to use the default style, set 'classic'
plt.rcParams['ytick.right']     = True
plt.rcParams['ytick.labelright']= True
plt.rcParams['ytick.left']      = False
plt.rcParams['ytick.labelleft'] = False
plt.rcParams['font.family']     = 'Arial'
In [3]:
# where am i?
%pwd
Out[3]:
'C:\\Users\\david\\Documents\\ImageNet'
In [4]:
flowers = glob.glob('./data/flr_*.jpg')
fungus = glob.glob('./data/fgs_*.jpg')
rocks = glob.glob('./data/rck_*.jpg')

pixel_flowers = glob.glob('./data/pxl_flower_*.jpeg')
pixel_umbrella = glob.glob('./data/pxl_umbrella_*.jpeg')
print("There are %s, %s flower, %s fungus, %s rock and %s umbrella pictures" %(len(flowers), len(pixel_flowers), len(fungus), len(rocks), len(pixel_umbrella)))
There are 1269, 1792 flower, 856 fungus, 1007 rock and 420 umbrella pictures
In [5]:
# Randomly show 10 examples of the images
from IPython.display import Image
    
dataset = flowers #flowers #fungus #rocks

for i in range(0, 5):
    index = random.randint(0, len(dataset)-1)   
    print("Showing:", dataset[index])
    
    img = mpimg.imread(dataset[index])
    imgplot = plt.imshow(img)
    plt.show()

#Image(dataset[index])
Showing: ./data\flr_00431.jpg
Showing: ./data\flr_01489.jpg
Showing: ./data\flr_01485.jpg
Showing: ./data\flr_00088.jpg
Showing: ./data\flr_01066.jpg

Extract the training and testing datasets

In [6]:
# Load the data
trDatOrg       = np.load('flrnonflr-train-imgs96-0.8.npz')['arr_0']
trLblOrg       = np.load('flrnonflr-train-labels96-0.8.npz')['arr_0']
tsDatOrg       = np.load('flrnonflr-test-imgs96-0.8.npz')['arr_0']
tsLblOrg       = np.load('flrnonflr-test-labels96-0.8.npz')['arr_0']
In [7]:
print("For the training and test datasets:")
print("The shapes are %s, %s, %s, %s" \
      %(trDatOrg.shape, trLblOrg.shape, tsDatOrg.shape, tsLblOrg.shape))
For the training and test datasets:
The shapes are (4264, 96, 96, 3), (4264,), (1067, 96, 96, 3), (1067,)
In [8]:
# Randomly show 10 examples of the images

data = tsDatOrg
label = tsLblOrg

for i in range(20):
    index = random.randint(0, len(data)-1)
    print("Showing %s index image, It is %s" %(index, label[index]))
    imgplot = plt.imshow(data[index])
    plt.show()
Showing 653 index image, It is 0.0
Showing 279 index image, It is 1.0
Showing 292 index image, It is 1.0
Showing 943 index image, It is 0.0
Showing 989 index image, It is 0.0
Showing 972 index image, It is 0.0
Showing 218 index image, It is 1.0
Showing 309 index image, It is 1.0
Showing 560 index image, It is 1.0
Showing 622 index image, It is 0.0
Showing 471 index image, It is 1.0
Showing 218 index image, It is 1.0
Showing 926 index image, It is 0.0
Showing 674 index image, It is 0.0
Showing 267 index image, It is 1.0
Showing 163 index image, It is 1.0
Showing 177 index image, It is 1.0
Showing 672 index image, It is 0.0
Showing 143 index image, It is 1.0
Showing 878 index image, It is 0.0
In [9]:
# Convert the data into 'float32'
# Rescale the values from 0~255 to 0~1
trDat       = trDatOrg.astype('float32')/255
tsDat       = tsDatOrg.astype('float32')/255

# Retrieve the row size of each image
# Retrieve the column size of each image
imgrows     = trDat.shape[1]
imgclms     = trDat.shape[2]
channel     = 3

# # reshape the data to be [samples][width][height][channel]
# # This is required by Keras framework
# trDat       = trDat.reshape(trDat.shape[0], imgrows, imgclms, channel)
# tsDat       = tsDat.reshape(tsDat.shape[0], imgrows, imgclms, channel)

# Perform one hot encoding on the labels
# Retrieve the number of classes in this problem
trLbl       = to_categorical(trLblOrg)
tsLbl       = to_categorical(tsLblOrg)
num_classes = tsLbl.shape[1]
In [10]:
# fix random seed for reproducibility
seed = 29
np.random.seed(seed)


modelname = 'FlowerPower'

def createBaselineModel():
    inputs = Input(shape=(imgrows, imgclms, channel))
    x = Conv2D(30, (4, 4), activation='relu')(inputs)
    x = MaxPooling2D(pool_size=(2, 2))(x)
    x = Conv2D(50, (4, 4), activation='relu')(x)
    x = MaxPooling2D(pool_size=(2, 2))(x)
    x = Dropout(0.3)(x)
    x = Flatten()(x)
    x = Dense(32, activation='relu')(x)
    x = Dense(num_classes, activation='softmax')(x)
    
    model = Model(inputs=[inputs],outputs=x)
    
    model.compile(loss='categorical_crossentropy', 
                  optimizer='adam',
                  metrics=['accuracy'])
    return model

def resLyr(inputs,
           numFilters=16,
           kernelSz=3,
           strides=1,
           activation='relu',
           batchNorm=True,
           convFirst=True,
           lyrName=None):
    convLyr = Conv2D(numFilters, kernel_size=kernelSz, strides=strides, 
                     padding='same', kernel_initializer='he_normal', 
                     kernel_regularizer=l2(1e-4), 
                     name=lyrName+'_conv' if lyrName else None)
    x = inputs
    if convFirst:
        x = convLyr(x)
        if batchNorm:
            x = BatchNormalization(name=lyrName+'_bn' if lyrName else None)(x)
        if activation is not None:
            x = Activation(activation,name=lyrName+'_'+activation if lyrName else None)(x)
    else:
        if batchNorm:
            x = BatchNormalization(name=lyrName+'_bn' if lyrName else None)(x)
        if activation is not None:
            x = Activation(activation, name=lyrName+'_'+activation if lyrName else None)(x)
        x = convLyr(x)
    return x


def resBlkV1(inputs,
             numFilters=16,
             numBlocks=3,
             downsampleOnFirst=True,
             names=None):
    x = inputs
    for run in range(0,numBlocks):
        strides = 1
        blkStr = str(run+1)
        if downsampleOnFirst and run == 0:
            strides = 2
        y = resLyr(inputs=x, numFilters=numFilters, strides=strides,
                   lyrName=names+'_Blk'+blkStr+'_Res1' if names else None)
        y = resLyr(inputs=y, numFilters=numFilters, activation=None,
                   lyrName=names+'_Blk'+blkStr+'_Res2' if names else None)
        if downsampleOnFirst and run == 0:
            x = resLyr(inputs=x, numFilters=numFilters, kernelSz=1,
                       strides=strides, activation=None, batchNorm=False,
                       lyrName=names+'_Blk'+blkStr+'_lin' if names else None)
        x = add([x,y], name=names+'_Blk'+blkStr+'_add' if names else None)
        x = Activation('relu', name=names+'_Blk'+blkStr+'_relu' if names else None)(x)
    return x

#optmz = optimizers.Adam(lr=0.001)
optmz = optimizers.RMSprop(lr=0.001)

def createResNetV1(inputShape=(imgrows, imgclms, channel),
                   numClasses=2):
    inputs = Input(shape=inputShape)
    v = resLyr(inputs, lyrName='Inpt')
    v = resBlkV1(inputs=v, numFilters=16, numBlocks=3,
                 downsampleOnFirst=False, names='Stg1')
    v = Dropout(0.30)(v)
    v = resBlkV1(inputs=v, numFilters=32, numBlocks=3,
                 downsampleOnFirst=True, names='Stg2')
    v = Dropout(0.40)(v)
    v = resBlkV1(inputs=v, numFilters=64, numBlocks=3,
                 downsampleOnFirst=True, names='Stg3')
    v = Dropout(0.50)(v)
    v = resBlkV1(inputs=v, numFilters=128, numBlocks=3,
                 downsampleOnFirst=True, names='Stg4')
    v = Dropout(0.50)(v)
    v = resBlkV1(inputs=v, numFilters=128, numBlocks=3,
                 downsampleOnFirst=False, names='Stg5')
    v = Dropout(0.50)(v)
    v = resBlkV1(inputs=v, numFilters=256, numBlocks=3,
                 downsampleOnFirst=True, names='Stg6')
    v = Dropout(0.50)(v)
    v = AveragePooling2D(pool_size=6, name='AvgPool')(v)
    v = Flatten()(v) 
    outputs = Dense(numClasses, activation='softmax', 
                    kernel_initializer='he_normal')(v)
    model = Model(inputs=inputs,outputs=outputs)
    model.compile(loss='categorical_crossentropy', 
                  optimizer=optmz, 
                  metrics=['accuracy'])
    return model



# Setup the models
model       = createResNetV1() # This is meant for training
modelGo     = createResNetV1() # This is used for final testing

model.summary()
WARNING:tensorflow:From D:\DocumentsDDrive\Installed_Files\Anaconda3\envs\tf-gpu\lib\site-packages\tensorflow\python\keras\initializers.py:104: calling VarianceScaling.__init__ (from tensorflow.python.ops.init_ops) with distribution=normal is deprecated and will be removed in a future version.
Instructions for updating:
`normal` is a deprecated alias for `truncated_normal`
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            (None, 96, 96, 3)    0                                            
__________________________________________________________________________________________________
Inpt_conv (Conv2D)              (None, 96, 96, 16)   448         input_1[0][0]                    
__________________________________________________________________________________________________
Inpt_bn (BatchNormalization)    (None, 96, 96, 16)   64          Inpt_conv[0][0]                  
__________________________________________________________________________________________________
Inpt_relu (Activation)          (None, 96, 96, 16)   0           Inpt_bn[0][0]                    
__________________________________________________________________________________________________
Stg1_Blk1_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Inpt_relu[0][0]                  
__________________________________________________________________________________________________
Stg1_Blk1_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk1_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_add (Add)             (None, 96, 96, 16)   0           Inpt_relu[0][0]                  
                                                                 Stg1_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk1_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg1_Blk2_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg1_Blk2_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk2_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_add (Add)             (None, 96, 96, 16)   0           Stg1_Blk1_relu[0][0]             
                                                                 Stg1_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk2_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg1_Blk3_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg1_Blk3_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk3_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_add (Add)             (None, 96, 96, 16)   0           Stg1_Blk2_relu[0][0]             
                                                                 Stg1_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk3_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout (Dropout)               (None, 96, 96, 16)   0           Stg1_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk1_Res1_conv (Conv2D)    (None, 48, 48, 32)   4640        dropout[0][0]                    
__________________________________________________________________________________________________
Stg2_Blk1_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk1_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_lin_conv (Conv2D)     (None, 48, 48, 32)   544         dropout[0][0]                    
__________________________________________________________________________________________________
Stg2_Blk1_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk1_lin_conv[0][0]         
                                                                 Stg2_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk1_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg2_Blk2_Res1_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk2_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk2_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk1_relu[0][0]             
                                                                 Stg2_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk2_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg2_Blk3_Res1_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk3_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk3_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk2_relu[0][0]             
                                                                 Stg2_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk3_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_1 (Dropout)             (None, 48, 48, 32)   0           Stg2_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk1_Res1_conv (Conv2D)    (None, 24, 24, 64)   18496       dropout_1[0][0]                  
__________________________________________________________________________________________________
Stg3_Blk1_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk1_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_lin_conv (Conv2D)     (None, 24, 24, 64)   2112        dropout_1[0][0]                  
__________________________________________________________________________________________________
Stg3_Blk1_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk1_lin_conv[0][0]         
                                                                 Stg3_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk1_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg3_Blk2_Res1_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk2_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk2_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk1_relu[0][0]             
                                                                 Stg3_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk2_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg3_Blk3_Res1_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk3_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk3_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk2_relu[0][0]             
                                                                 Stg3_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk3_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_2 (Dropout)             (None, 24, 24, 64)   0           Stg3_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg4_Blk1_Res1_conv (Conv2D)    (None, 12, 12, 128)  73856       dropout_2[0][0]                  
__________________________________________________________________________________________________
Stg4_Blk1_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk1_Res1_relu (Activation (None, 12, 12, 128)  0           Stg4_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk1_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg4_Blk1_lin_conv (Conv2D)     (None, 12, 12, 128)  8320        dropout_2[0][0]                  
__________________________________________________________________________________________________
Stg4_Blk1_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk1_add (Add)             (None, 12, 12, 128)  0           Stg4_Blk1_lin_conv[0][0]         
                                                                 Stg4_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk1_relu (Activation)     (None, 12, 12, 128)  0           Stg4_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg4_Blk2_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg4_Blk2_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk2_Res1_relu (Activation (None, 12, 12, 128)  0           Stg4_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk2_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg4_Blk2_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk2_add (Add)             (None, 12, 12, 128)  0           Stg4_Blk1_relu[0][0]             
                                                                 Stg4_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk2_relu (Activation)     (None, 12, 12, 128)  0           Stg4_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg4_Blk3_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg4_Blk3_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk3_Res1_relu (Activation (None, 12, 12, 128)  0           Stg4_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk3_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg4_Blk3_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk3_add (Add)             (None, 12, 12, 128)  0           Stg4_Blk2_relu[0][0]             
                                                                 Stg4_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk3_relu (Activation)     (None, 12, 12, 128)  0           Stg4_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_3 (Dropout)             (None, 12, 12, 128)  0           Stg4_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg5_Blk1_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      dropout_3[0][0]                  
__________________________________________________________________________________________________
Stg5_Blk1_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk1_Res1_relu (Activation (None, 12, 12, 128)  0           Stg5_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk1_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg5_Blk1_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk1_add (Add)             (None, 12, 12, 128)  0           dropout_3[0][0]                  
                                                                 Stg5_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk1_relu (Activation)     (None, 12, 12, 128)  0           Stg5_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg5_Blk2_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg5_Blk2_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk2_Res1_relu (Activation (None, 12, 12, 128)  0           Stg5_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk2_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg5_Blk2_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk2_add (Add)             (None, 12, 12, 128)  0           Stg5_Blk1_relu[0][0]             
                                                                 Stg5_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk2_relu (Activation)     (None, 12, 12, 128)  0           Stg5_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg5_Blk3_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg5_Blk3_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk3_Res1_relu (Activation (None, 12, 12, 128)  0           Stg5_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk3_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg5_Blk3_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk3_add (Add)             (None, 12, 12, 128)  0           Stg5_Blk2_relu[0][0]             
                                                                 Stg5_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk3_relu (Activation)     (None, 12, 12, 128)  0           Stg5_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_4 (Dropout)             (None, 12, 12, 128)  0           Stg5_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg6_Blk1_Res1_conv (Conv2D)    (None, 6, 6, 256)    295168      dropout_4[0][0]                  
__________________________________________________________________________________________________
Stg6_Blk1_Res1_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk1_Res1_relu (Activation (None, 6, 6, 256)    0           Stg6_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk1_Res2_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg6_Blk1_lin_conv (Conv2D)     (None, 6, 6, 256)    33024       dropout_4[0][0]                  
__________________________________________________________________________________________________
Stg6_Blk1_Res2_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk1_add (Add)             (None, 6, 6, 256)    0           Stg6_Blk1_lin_conv[0][0]         
                                                                 Stg6_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk1_relu (Activation)     (None, 6, 6, 256)    0           Stg6_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg6_Blk2_Res1_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg6_Blk2_Res1_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk2_Res1_relu (Activation (None, 6, 6, 256)    0           Stg6_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk2_Res2_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg6_Blk2_Res2_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk2_add (Add)             (None, 6, 6, 256)    0           Stg6_Blk1_relu[0][0]             
                                                                 Stg6_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk2_relu (Activation)     (None, 6, 6, 256)    0           Stg6_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg6_Blk3_Res1_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg6_Blk3_Res1_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk3_Res1_relu (Activation (None, 6, 6, 256)    0           Stg6_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk3_Res2_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg6_Blk3_Res2_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk3_add (Add)             (None, 6, 6, 256)    0           Stg6_Blk2_relu[0][0]             
                                                                 Stg6_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk3_relu (Activation)     (None, 6, 6, 256)    0           Stg6_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_5 (Dropout)             (None, 6, 6, 256)    0           Stg6_Blk3_relu[0][0]             
__________________________________________________________________________________________________
AvgPool (AveragePooling2D)      (None, 1, 1, 256)    0           dropout_5[0][0]                  
__________________________________________________________________________________________________
flatten (Flatten)               (None, 256)          0           AvgPool[0][0]                    
__________________________________________________________________________________________________
dense (Dense)                   (None, 2)            514         flatten[0][0]                    
==================================================================================================
Total params: 5,270,786
Trainable params: 5,263,266
Non-trainable params: 7,520
__________________________________________________________________________________________________
In [11]:
# Create checkpoint for the training
# This checkpoint performs model saving when
# an epoch gives highest testing accuracy
# filepath        = modelname + ".hdf5"
# checkpoint      = ModelCheckpoint(filepath, 
#                                   monitor='val_acc', 
#                                   verbose=0, 
#                                   save_best_only=True, 
#                                   mode='max')

#                             # Log the epoch detail into csv
# csv_logger      = CSVLogger(modelname +'.csv')
# callbacks_list  = [checkpoint,csv_logger]

def lrSchedule(epoch):
    lr  = 1e-3
    
    if epoch > 190:
        lr  *= 0.5e-3
        
    elif epoch > 160:
        lr  *= 1e-3
        
    elif epoch > 140:
        lr  *= 1e-2
        
    elif epoch > 100:
        lr  *= 1e-1
        
    print('Learning rate: ', lr)
    
    return lr

LRScheduler     = LearningRateScheduler(lrSchedule)

                            # Create checkpoint for the training
                            # This checkpoint performs model saving when
                            # an epoch gives highest testing accuracy
filepath        = modelname + ".hdf5"
checkpoint      = ModelCheckpoint(filepath, 
                                  monitor='val_acc', 
                                  verbose=0, 
                                  save_best_only=True, 
                                  mode='max')

                            # Log the epoch detail into csv
csv_logger      = CSVLogger(modelname +'.csv')
callbacks_list  = [checkpoint, csv_logger, LRScheduler]
#callbacks_list  = [checkpoint, csv_logger]
In [12]:
# Fit the model
# This is where the training starts
# model.fit(trDat, 
#           trLbl, 
#           validation_data=(tsDat, tsLbl), 
#           epochs=120, 
#           batch_size=32,
#           callbacks=callbacks_list)

datagen = ImageDataGenerator(width_shift_range=0.25,
                             height_shift_range=0.25,
                             rotation_range=45,
                             zoom_range=0.8,
                             #zca_epsilon=1e-6,
                             #zca_whitening=True,
                             fill_mode='nearest',
                             horizontal_flip=True,
                             vertical_flip=False)

model.fit_generator(datagen.flow(trDat, trLbl, batch_size=24),
                    validation_data=(tsDat, tsLbl),
                    epochs=200, 
                    verbose=1,
                    steps_per_epoch=len(trDat)/24,
                    callbacks=callbacks_list)
Learning rate:  0.001
Epoch 1/200
178/177 [==============================] - 58s 328ms/step - loss: 2.1327 - acc: 0.6351 - val_loss: 2.0565 - val_acc: 0.6485
Learning rate:  0.001
Epoch 2/200
178/177 [==============================] - 37s 207ms/step - loss: 1.3059 - acc: 0.7198 - val_loss: 1.6505 - val_acc: 0.6326
Learning rate:  0.001
Epoch 3/200
178/177 [==============================] - 37s 208ms/step - loss: 1.0295 - acc: 0.7589 - val_loss: 1.1845 - val_acc: 0.5829
Learning rate:  0.001
Epoch 4/200
178/177 [==============================] - 37s 209ms/step - loss: 0.9074 - acc: 0.7646 - val_loss: 0.8381 - val_acc: 0.6804
Learning rate:  0.001
Epoch 5/200
178/177 [==============================] - 38s 212ms/step - loss: 0.7966 - acc: 0.7680 - val_loss: 0.7576 - val_acc: 0.7235
Learning rate:  0.001
Epoch 6/200
178/177 [==============================] - 37s 208ms/step - loss: 0.7146 - acc: 0.7843 - val_loss: 0.7182 - val_acc: 0.7095
Learning rate:  0.001
Epoch 7/200
178/177 [==============================] - 37s 208ms/step - loss: 0.6779 - acc: 0.7872 - val_loss: 0.7360 - val_acc: 0.6504
Learning rate:  0.001
Epoch 8/200
178/177 [==============================] - 37s 208ms/step - loss: 0.6221 - acc: 0.8023 - val_loss: 0.7374 - val_acc: 0.6963
Learning rate:  0.001
Epoch 9/200
178/177 [==============================] - 37s 210ms/step - loss: 0.6019 - acc: 0.8086 - val_loss: 1.3777 - val_acc: 0.6036
Learning rate:  0.001
Epoch 10/200
178/177 [==============================] - 37s 209ms/step - loss: 0.5791 - acc: 0.8133 - val_loss: 0.5244 - val_acc: 0.8276
Learning rate:  0.001
Epoch 11/200
178/177 [==============================] - 37s 208ms/step - loss: 0.5470 - acc: 0.8140 - val_loss: 0.5551 - val_acc: 0.8210
Learning rate:  0.001
Epoch 12/200
178/177 [==============================] - 37s 208ms/step - loss: 0.5390 - acc: 0.8110 - val_loss: 0.5852 - val_acc: 0.8172
Learning rate:  0.001
Epoch 13/200
178/177 [==============================] - 37s 209ms/step - loss: 0.5072 - acc: 0.8246 - val_loss: 0.4944 - val_acc: 0.8650
Learning rate:  0.001
Epoch 14/200
178/177 [==============================] - 37s 209ms/step - loss: 0.4904 - acc: 0.8320 - val_loss: 0.4886 - val_acc: 0.8828 loss: 0.4891 - acc: 
Learning rate:  0.001
Epoch 15/200
178/177 [==============================] - 37s 210ms/step - loss: 0.4884 - acc: 0.8312 - val_loss: 0.5177 - val_acc: 0.8135
Learning rate:  0.001
Epoch 16/200
178/177 [==============================] - 37s 208ms/step - loss: 0.4782 - acc: 0.8332 - val_loss: 0.4433 - val_acc: 0.8416
Learning rate:  0.001
Epoch 17/200
178/177 [==============================] - 37s 208ms/step - loss: 0.4584 - acc: 0.8409 - val_loss: 0.4408 - val_acc: 0.8500
Learning rate:  0.001
Epoch 18/200
178/177 [==============================] - 37s 208ms/step - loss: 0.4633 - acc: 0.8418 - val_loss: 0.5593 - val_acc: 0.7966
Learning rate:  0.001
Epoch 19/200
178/177 [==============================] - 37s 208ms/step - loss: 0.4642 - acc: 0.8430 - val_loss: 0.4755 - val_acc: 0.8369
Learning rate:  0.001
Epoch 20/200
178/177 [==============================] - 38s 211ms/step - loss: 0.4495 - acc: 0.8443 - val_loss: 0.3985 - val_acc: 0.8754
Learning rate:  0.001
Epoch 21/200
178/177 [==============================] - 37s 208ms/step - loss: 0.4510 - acc: 0.8393 - val_loss: 0.4039 - val_acc: 0.8641
Learning rate:  0.001
Epoch 22/200
178/177 [==============================] - 37s 208ms/step - loss: 0.4342 - acc: 0.8498 - val_loss: 0.3908 - val_acc: 0.8754
Learning rate:  0.001
Epoch 23/200
178/177 [==============================] - 37s 209ms/step - loss: 0.4187 - acc: 0.8557 - val_loss: 0.3835 - val_acc: 0.8903
Learning rate:  0.001
Epoch 24/200
178/177 [==============================] - 37s 208ms/step - loss: 0.4269 - acc: 0.8564 - val_loss: 0.4033 - val_acc: 0.8547
Learning rate:  0.001
Epoch 25/200
178/177 [==============================] - 37s 208ms/step - loss: 0.4284 - acc: 0.8459 - val_loss: 0.3817 - val_acc: 0.8679
Learning rate:  0.001
Epoch 26/200
178/177 [==============================] - 37s 208ms/step - loss: 0.4305 - acc: 0.8455 - val_loss: 0.5191 - val_acc: 0.7994
Learning rate:  0.001
Epoch 27/200
178/177 [==============================] - 38s 211ms/step - loss: 0.4227 - acc: 0.8488 - val_loss: 0.4080 - val_acc: 0.8369
Learning rate:  0.001
Epoch 28/200
178/177 [==============================] - 37s 210ms/step - loss: 0.3985 - acc: 0.8615 - val_loss: 0.3490 - val_acc: 0.8866
Learning rate:  0.001
Epoch 29/200
178/177 [==============================] - 37s 209ms/step - loss: 0.4003 - acc: 0.8548 - val_loss: 0.3512 - val_acc: 0.8950
Learning rate:  0.001
Epoch 30/200
178/177 [==============================] - 37s 209ms/step - loss: 0.3939 - acc: 0.8574 - val_loss: 0.3232 - val_acc: 0.8978
Learning rate:  0.001
Epoch 31/200
178/177 [==============================] - 37s 208ms/step - loss: 0.4153 - acc: 0.8440 - val_loss: 0.3661 - val_acc: 0.8810
Learning rate:  0.001
Epoch 32/200
178/177 [==============================] - 37s 208ms/step - loss: 0.4094 - acc: 0.8469 - val_loss: 0.3773 - val_acc: 0.8575
Learning rate:  0.001
Epoch 33/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3958 - acc: 0.8586 - val_loss: 0.3659 - val_acc: 0.8735
Learning rate:  0.001
Epoch 34/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3876 - acc: 0.8612 - val_loss: 0.3781 - val_acc: 0.8894
Learning rate:  0.001
Epoch 35/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3962 - acc: 0.8509 - val_loss: 0.3279 - val_acc: 0.8941
Learning rate:  0.001
Epoch 36/200
178/177 [==============================] - 38s 211ms/step - loss: 0.3945 - acc: 0.8597 - val_loss: 0.4190 - val_acc: 0.8557
Learning rate:  0.001
Epoch 37/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3925 - acc: 0.8600 - val_loss: 0.3410 - val_acc: 0.8922
Learning rate:  0.001
Epoch 38/200
178/177 [==============================] - 38s 211ms/step - loss: 0.3874 - acc: 0.8611 - val_loss: 0.4770 - val_acc: 0.8416
Learning rate:  0.001
Epoch 39/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3876 - acc: 0.8645 - val_loss: 0.4241 - val_acc: 0.8397
Learning rate:  0.001
Epoch 40/200
178/177 [==============================] - 38s 213ms/step - loss: 0.3832 - acc: 0.8590 - val_loss: 0.7564 - val_acc: 0.6870
Learning rate:  0.001
Epoch 41/200
178/177 [==============================] - 37s 210ms/step - loss: 0.3869 - acc: 0.8555 - val_loss: 0.3265 - val_acc: 0.8950
Learning rate:  0.001
Epoch 42/200
178/177 [==============================] - 37s 209ms/step - loss: 0.3778 - acc: 0.8683 - val_loss: 0.3164 - val_acc: 0.9082
Learning rate:  0.001
Epoch 43/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3808 - acc: 0.8617 - val_loss: 0.3785 - val_acc: 0.8903
Learning rate:  0.001
Epoch 44/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3769 - acc: 0.8617 - val_loss: 0.3732 - val_acc: 0.8510
Learning rate:  0.001
Epoch 45/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3784 - acc: 0.8640 - val_loss: 0.4061 - val_acc: 0.8500
Learning rate:  0.001
Epoch 46/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3741 - acc: 0.8661 - val_loss: 0.3230 - val_acc: 0.8847
Learning rate:  0.001
Epoch 47/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3687 - acc: 0.8612 - val_loss: 0.3440 - val_acc: 0.8791
Learning rate:  0.001
Epoch 48/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3745 - acc: 0.8687 - val_loss: 0.4086 - val_acc: 0.8519
Learning rate:  0.001
Epoch 49/200
178/177 [==============================] - 37s 211ms/step - loss: 0.3652 - acc: 0.8694 - val_loss: 0.3591 - val_acc: 0.8941 ETA: 1s - loss: 0.3605 - ac
Learning rate:  0.001
Epoch 50/200
178/177 [==============================] - 37s 211ms/step - loss: 0.3571 - acc: 0.8708 - val_loss: 0.3228 - val_acc: 0.8800
Learning rate:  0.001
Epoch 51/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3598 - acc: 0.8700 - val_loss: 0.3450 - val_acc: 0.8950
Learning rate:  0.001
Epoch 52/200
178/177 [==============================] - 38s 211ms/step - loss: 0.3665 - acc: 0.8699 - val_loss: 0.3716 - val_acc: 0.8679
Learning rate:  0.001
Epoch 53/200
178/177 [==============================] - 38s 211ms/step - loss: 0.3641 - acc: 0.8667 - val_loss: 0.3055 - val_acc: 0.8978
Learning rate:  0.001
Epoch 54/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3640 - acc: 0.8755 - val_loss: 0.3764 - val_acc: 0.8894
Learning rate:  0.001
Epoch 55/200
178/177 [==============================] - 37s 210ms/step - loss: 0.3463 - acc: 0.8787 - val_loss: 0.3496 - val_acc: 0.8978
Learning rate:  0.001
Epoch 56/200
178/177 [==============================] - 37s 210ms/step - loss: 0.3674 - acc: 0.8662 - val_loss: 0.2937 - val_acc: 0.8997
Learning rate:  0.001
Epoch 57/200
178/177 [==============================] - 38s 211ms/step - loss: 0.3483 - acc: 0.8748 - val_loss: 0.3197 - val_acc: 0.8941
Learning rate:  0.001
Epoch 58/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3551 - acc: 0.8697 - val_loss: 0.3038 - val_acc: 0.8932
Learning rate:  0.001
Epoch 59/200
178/177 [==============================] - 38s 211ms/step - loss: 0.3382 - acc: 0.8797 - val_loss: 0.3172 - val_acc: 0.8969
Learning rate:  0.001
Epoch 60/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3639 - acc: 0.8638 - val_loss: 1.0493 - val_acc: 0.7807
Learning rate:  0.001
Epoch 61/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3503 - acc: 0.8741 - val_loss: 0.2922 - val_acc: 0.8988
Learning rate:  0.001
Epoch 62/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3572 - acc: 0.8670 - val_loss: 0.3119 - val_acc: 0.8960
Learning rate:  0.001
Epoch 63/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3420 - acc: 0.8803 - val_loss: 0.2969 - val_acc: 0.8960
Learning rate:  0.001
Epoch 64/200
178/177 [==============================] - 37s 209ms/step - loss: 0.3511 - acc: 0.8708 - val_loss: 0.2874 - val_acc: 0.9091 - loss: 0.346
Learning rate:  0.001
Epoch 65/200
178/177 [==============================] - 37s 210ms/step - loss: 0.3487 - acc: 0.8757 - val_loss: 0.6475 - val_acc: 0.8529
Learning rate:  0.001
Epoch 66/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3466 - acc: 0.8768 - val_loss: 0.3429 - val_acc: 0.8922
Learning rate:  0.001
Epoch 67/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3553 - acc: 0.8695 - val_loss: 0.4331 - val_acc: 0.8472
Learning rate:  0.001
Epoch 68/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3478 - acc: 0.8773 - val_loss: 0.3316 - val_acc: 0.8828
Learning rate:  0.001
Epoch 69/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3428 - acc: 0.8699 - val_loss: 0.3104 - val_acc: 0.8960
Learning rate:  0.001
Epoch 70/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3529 - acc: 0.8731 - val_loss: 0.2995 - val_acc: 0.8988
Learning rate:  0.001
Epoch 71/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3491 - acc: 0.8746 - val_loss: 0.3431 - val_acc: 0.8707
Learning rate:  0.001
Epoch 72/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3470 - acc: 0.8742 - val_loss: 0.4181 - val_acc: 0.8585
Learning rate:  0.001
Epoch 73/200
178/177 [==============================] - 38s 212ms/step - loss: 0.3460 - acc: 0.8820 - val_loss: 0.3378 - val_acc: 0.8735
Learning rate:  0.001
Epoch 74/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3319 - acc: 0.8789 - val_loss: 0.4746 - val_acc: 0.8360s - los
Learning rate:  0.001
Epoch 75/200
178/177 [==============================] - 37s 210ms/step - loss: 0.3354 - acc: 0.8798 - val_loss: 0.3514 - val_acc: 0.8922
Learning rate:  0.001
Epoch 76/200
178/177 [==============================] - 37s 207ms/step - loss: 0.3421 - acc: 0.8723 - val_loss: 0.3175 - val_acc: 0.8857
Learning rate:  0.001
Epoch 77/200
178/177 [==============================] - 37s 210ms/step - loss: 0.3586 - acc: 0.8728 - val_loss: 0.3177 - val_acc: 0.8894
Learning rate:  0.001
Epoch 78/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3468 - acc: 0.8706 - val_loss: 0.5373 - val_acc: 0.7985
Learning rate:  0.001
Epoch 79/200
178/177 [==============================] - 37s 210ms/step - loss: 0.3430 - acc: 0.8709 - val_loss: 0.3150 - val_acc: 0.9016
Learning rate:  0.001
Epoch 80/200
178/177 [==============================] - 37s 207ms/step - loss: 0.3322 - acc: 0.8830 - val_loss: 0.3312 - val_acc: 0.8828
Learning rate:  0.001
Epoch 81/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3262 - acc: 0.8798 - val_loss: 0.3017 - val_acc: 0.8969
Learning rate:  0.001
Epoch 82/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3376 - acc: 0.8839 - val_loss: 0.2941 - val_acc: 0.9091
Learning rate:  0.001
Epoch 83/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3353 - acc: 0.8785 - val_loss: 0.3022 - val_acc: 0.90911s - loss: 0.3301 - ac
Learning rate:  0.001
Epoch 84/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3372 - acc: 0.8807 - val_loss: 0.3558 - val_acc: 0.8772
Learning rate:  0.001
Epoch 85/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3459 - acc: 0.8729 - val_loss: 0.2946 - val_acc: 0.9025
Learning rate:  0.001
Epoch 86/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3456 - acc: 0.8727 - val_loss: 0.3348 - val_acc: 0.8941
Learning rate:  0.001
Epoch 87/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3415 - acc: 0.8805 - val_loss: 0.3060 - val_acc: 0.8932
Learning rate:  0.001
Epoch 88/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3368 - acc: 0.8797 - val_loss: 0.2770 - val_acc: 0.9025
Learning rate:  0.001
Epoch 89/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3389 - acc: 0.8802 - val_loss: 0.3327 - val_acc: 0.8754
Learning rate:  0.001
Epoch 90/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3319 - acc: 0.8798 - val_loss: 0.3982 - val_acc: 0.8529
Learning rate:  0.001
Epoch 91/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3367 - acc: 0.8761 - val_loss: 0.3129 - val_acc: 0.9082
Learning rate:  0.001
Epoch 92/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3324 - acc: 0.8859 - val_loss: 0.3054 - val_acc: 0.9044
Learning rate:  0.001
Epoch 93/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3302 - acc: 0.8817 - val_loss: 0.2744 - val_acc: 0.9072
Learning rate:  0.001
Epoch 94/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3359 - acc: 0.8811 - val_loss: 0.3603 - val_acc: 0.8763
Learning rate:  0.001
Epoch 95/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3311 - acc: 0.8842 - val_loss: 0.4800 - val_acc: 0.8838
Learning rate:  0.001
Epoch 96/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3317 - acc: 0.8875 - val_loss: 0.3545 - val_acc: 0.8641
Learning rate:  0.001
Epoch 97/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3302 - acc: 0.8790 - val_loss: 0.5405 - val_acc: 0.8407
Learning rate:  0.001
Epoch 98/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3374 - acc: 0.8771 - val_loss: 0.3435 - val_acc: 0.8725
Learning rate:  0.001
Epoch 99/200
178/177 [==============================] - 38s 211ms/step - loss: 0.3323 - acc: 0.8811 - val_loss: 0.3150 - val_acc: 0.8950
Learning rate:  0.001
Epoch 100/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3267 - acc: 0.8821 - val_loss: 0.2954 - val_acc: 0.9025
Learning rate:  0.001
Epoch 101/200
178/177 [==============================] - 37s 208ms/step - loss: 0.3331 - acc: 0.8839 - val_loss: 0.3146 - val_acc: 0.8950
Learning rate:  0.0001
Epoch 102/200
178/177 [==============================] - 37s 209ms/step - loss: 0.2986 - acc: 0.8954 - val_loss: 0.2755 - val_acc: 0.9110
Learning rate:  0.0001
Epoch 103/200
178/177 [==============================] - 37s 209ms/step - loss: 0.3025 - acc: 0.8960 - val_loss: 0.2722 - val_acc: 0.9110
Learning rate:  0.0001
Epoch 104/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2857 - acc: 0.8992 - val_loss: 0.2694 - val_acc: 0.9100
Learning rate:  0.0001
Epoch 105/200
178/177 [==============================] - 38s 211ms/step - loss: 0.2824 - acc: 0.9050 - val_loss: 0.2733 - val_acc: 0.9053
Learning rate:  0.0001
Epoch 106/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2995 - acc: 0.8920 - val_loss: 0.2734 - val_acc: 0.9072
Learning rate:  0.0001
Epoch 107/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2854 - acc: 0.9032 - val_loss: 0.2788 - val_acc: 0.9100
Learning rate:  0.0001
Epoch 108/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2871 - acc: 0.8982 - val_loss: 0.2814 - val_acc: 0.9044
Learning rate:  0.0001
Epoch 109/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2768 - acc: 0.9047 - val_loss: 0.2699 - val_acc: 0.9063
Learning rate:  0.0001
Epoch 110/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2814 - acc: 0.8995 - val_loss: 0.2770 - val_acc: 0.9100
Learning rate:  0.0001
Epoch 111/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2787 - acc: 0.9033 - val_loss: 0.2883 - val_acc: 0.8969
Learning rate:  0.0001
Epoch 112/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2841 - acc: 0.9031 - val_loss: 0.2714 - val_acc: 0.9100
Learning rate:  0.0001
Epoch 113/200
178/177 [==============================] - 38s 211ms/step - loss: 0.2796 - acc: 0.9048 - val_loss: 0.2806 - val_acc: 0.9053
Learning rate:  0.0001
Epoch 114/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2774 - acc: 0.9038 - val_loss: 0.2736 - val_acc: 0.9072
Learning rate:  0.0001
Epoch 115/200
178/177 [==============================] - 37s 209ms/step - loss: 0.2811 - acc: 0.9045 - val_loss: 0.2641 - val_acc: 0.9128
Learning rate:  0.0001
Epoch 116/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2791 - acc: 0.9059 - val_loss: 0.2762 - val_acc: 0.9091
Learning rate:  0.0001
Epoch 117/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2767 - acc: 0.9040 - val_loss: 0.2825 - val_acc: 0.8969
Learning rate:  0.0001
Epoch 118/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2741 - acc: 0.9048 - val_loss: 0.2710 - val_acc: 0.9119
Learning rate:  0.0001
Epoch 119/200
178/177 [==============================] - 38s 211ms/step - loss: 0.2673 - acc: 0.9068 - val_loss: 0.2677 - val_acc: 0.9110
Learning rate:  0.0001
Epoch 120/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2854 - acc: 0.9031 - val_loss: 0.2784 - val_acc: 0.9044
Learning rate:  0.0001
Epoch 121/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2763 - acc: 0.9048 - val_loss: 0.2724 - val_acc: 0.9053
Learning rate:  0.0001
Epoch 122/200
178/177 [==============================] - 37s 209ms/step - loss: 0.2715 - acc: 0.9051 - val_loss: 0.2631 - val_acc: 0.9175
Learning rate:  0.0001
Epoch 123/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2703 - acc: 0.9077 - val_loss: 0.2913 - val_acc: 0.8997
Learning rate:  0.0001
Epoch 124/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2759 - acc: 0.9011 - val_loss: 0.2770 - val_acc: 0.9072
Learning rate:  0.0001
Epoch 125/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2821 - acc: 0.8984 - val_loss: 0.2841 - val_acc: 0.8978
Learning rate:  0.0001
Epoch 126/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2720 - acc: 0.9030 - val_loss: 0.2684 - val_acc: 0.9110
Learning rate:  0.0001
Epoch 127/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2683 - acc: 0.9053 - val_loss: 0.2692 - val_acc: 0.9082
Learning rate:  0.0001
Epoch 128/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2636 - acc: 0.9141 - val_loss: 0.2649 - val_acc: 0.9138
Learning rate:  0.0001
Epoch 129/200
178/177 [==============================] - 37s 209ms/step - loss: 0.2775 - acc: 0.9041 - val_loss: 0.2882 - val_acc: 0.9025
Learning rate:  0.0001
Epoch 130/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2790 - acc: 0.8986 - val_loss: 0.3008 - val_acc: 0.9016
Learning rate:  0.0001
Epoch 131/200
178/177 [==============================] - 38s 211ms/step - loss: 0.2769 - acc: 0.8998 - val_loss: 0.3137 - val_acc: 0.8941
Learning rate:  0.0001
Epoch 132/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2607 - acc: 0.9123 - val_loss: 0.2746 - val_acc: 0.9100
Learning rate:  0.0001
Epoch 133/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2700 - acc: 0.9055 - val_loss: 0.2675 - val_acc: 0.9063
Learning rate:  0.0001
Epoch 134/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2647 - acc: 0.9047 - val_loss: 0.2671 - val_acc: 0.9138: 0
Learning rate:  0.0001
Epoch 135/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2790 - acc: 0.9048 - val_loss: 0.2729 - val_acc: 0.9007
Learning rate:  0.0001
Epoch 136/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2613 - acc: 0.9096 - val_loss: 0.3145 - val_acc: 0.8913
Learning rate:  0.0001
Epoch 137/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2694 - acc: 0.9037 - val_loss: 0.2890 - val_acc: 0.9100
Learning rate:  0.0001
Epoch 138/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2596 - acc: 0.9088 - val_loss: 0.2596 - val_acc: 0.9100
Learning rate:  0.0001
Epoch 139/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2674 - acc: 0.9082 - val_loss: 0.2805 - val_acc: 0.9091 - loss: 0.2679 - acc: 0
Learning rate:  0.0001
Epoch 140/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2670 - acc: 0.9075 - val_loss: 0.3185 - val_acc: 0.8932
Learning rate:  0.0001
Epoch 141/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2603 - acc: 0.9118 - val_loss: 0.2754 - val_acc: 0.9091
Learning rate:  1e-05
Epoch 142/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2589 - acc: 0.9115 - val_loss: 0.2776 - val_acc: 0.9110
Learning rate:  1e-05
Epoch 143/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2552 - acc: 0.9125 - val_loss: 0.2803 - val_acc: 0.9063
Learning rate:  1e-05
Epoch 144/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2557 - acc: 0.9116 - val_loss: 0.2780 - val_acc: 0.9082
Learning rate:  1e-05
Epoch 145/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2572 - acc: 0.9084 - val_loss: 0.2850 - val_acc: 0.9100
Learning rate:  1e-05
Epoch 146/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2566 - acc: 0.9151 - val_loss: 0.2831 - val_acc: 0.9082
Learning rate:  1e-05
Epoch 147/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2657 - acc: 0.9114 - val_loss: 0.2838 - val_acc: 0.9072
Learning rate:  1e-05
Epoch 148/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2671 - acc: 0.9072 - val_loss: 0.2748 - val_acc: 0.9072
Learning rate:  1e-05
Epoch 149/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2602 - acc: 0.9113 - val_loss: 0.2800 - val_acc: 0.9091
Learning rate:  1e-05
Epoch 150/200
178/177 [==============================] - 38s 211ms/step - loss: 0.2612 - acc: 0.9060 - val_loss: 0.2759 - val_acc: 0.9110
Learning rate:  1e-05
Epoch 151/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2651 - acc: 0.9077 - val_loss: 0.2745 - val_acc: 0.9100
Learning rate:  1e-05
Epoch 152/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2565 - acc: 0.9128 - val_loss: 0.2783 - val_acc: 0.9119
Learning rate:  1e-05
Epoch 153/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2680 - acc: 0.9067 - val_loss: 0.2801 - val_acc: 0.9091
Learning rate:  1e-05
Epoch 154/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2583 - acc: 0.9101 - val_loss: 0.2776 - val_acc: 0.9100
Learning rate:  1e-05
Epoch 155/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2550 - acc: 0.9113 - val_loss: 0.2790 - val_acc: 0.9119
Learning rate:  1e-05
Epoch 156/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2648 - acc: 0.9081 - val_loss: 0.2799 - val_acc: 0.9072
Learning rate:  1e-05
Epoch 157/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2551 - acc: 0.9146 - val_loss: 0.2770 - val_acc: 0.9110
Learning rate:  1e-05
Epoch 158/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2632 - acc: 0.9096 - val_loss: 0.2796 - val_acc: 0.9110
Learning rate:  1e-05
Epoch 159/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2598 - acc: 0.9129 - val_loss: 0.2776 - val_acc: 0.9110
Learning rate:  1e-05
Epoch 160/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2602 - acc: 0.9136 - val_loss: 0.2774 - val_acc: 0.9110
Learning rate:  1e-05
Epoch 161/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2540 - acc: 0.9113 - val_loss: 0.2783 - val_acc: 0.9110 0.2513 - acc: 0.9
Learning rate:  1e-06
Epoch 162/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2626 - acc: 0.9075 - val_loss: 0.2811 - val_acc: 0.9110
Learning rate:  1e-06
Epoch 163/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2676 - acc: 0.9088 - val_loss: 0.2791 - val_acc: 0.9091
Learning rate:  1e-06
Epoch 164/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2527 - acc: 0.9121 - val_loss: 0.2792 - val_acc: 0.9091
Learning rate:  1e-06
Epoch 165/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2668 - acc: 0.9077 - val_loss: 0.2762 - val_acc: 0.9110
Learning rate:  1e-06
Epoch 166/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2555 - acc: 0.9155 - val_loss: 0.2797 - val_acc: 0.9110
Learning rate:  1e-06
Epoch 167/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2592 - acc: 0.9125 - val_loss: 0.2769 - val_acc: 0.9091
Learning rate:  1e-06
Epoch 168/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2452 - acc: 0.9156 - val_loss: 0.2761 - val_acc: 0.9100
Learning rate:  1e-06
Epoch 169/200
178/177 [==============================] - 38s 211ms/step - loss: 0.2568 - acc: 0.9121 - val_loss: 0.2795 - val_acc: 0.91102568 - acc:
Learning rate:  1e-06
Epoch 170/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2533 - acc: 0.9114 - val_loss: 0.2775 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 171/200
178/177 [==============================] - 38s 211ms/step - loss: 0.2602 - acc: 0.9113 - val_loss: 0.2780 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 172/200
178/177 [==============================] - 38s 212ms/step - loss: 0.2560 - acc: 0.9106 - val_loss: 0.2786 - val_acc: 0.9100
Learning rate:  1e-06
Epoch 173/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2548 - acc: 0.9148 - val_loss: 0.2761 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 174/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2508 - acc: 0.9160 - val_loss: 0.2776 - val_acc: 0.9110
Learning rate:  1e-06
Epoch 175/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2623 - acc: 0.9078 - val_loss: 0.2789 - val_acc: 0.9110
Learning rate:  1e-06
Epoch 176/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2592 - acc: 0.9116 - val_loss: 0.2789 - val_acc: 0.9110 2s - loss: 0.2620 -
Learning rate:  1e-06
Epoch 177/200
178/177 [==============================] - 37s 211ms/step - loss: 0.2544 - acc: 0.9120 - val_loss: 0.2804 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 178/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2506 - acc: 0.9127 - val_loss: 0.2794 - val_acc: 0.9110
Learning rate:  1e-06
Epoch 179/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2622 - acc: 0.9085 - val_loss: 0.2789 - val_acc: 0.9110
Learning rate:  1e-06
Epoch 180/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2579 - acc: 0.9141 - val_loss: 0.2792 - val_acc: 0.9110
Learning rate:  1e-06
Epoch 181/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2537 - acc: 0.9183 - val_loss: 0.2797 - val_acc: 0.9091
Learning rate:  1e-06
Epoch 182/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2532 - acc: 0.9109 - val_loss: 0.2777 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 183/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2630 - acc: 0.9118 - val_loss: 0.2796 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 184/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2474 - acc: 0.9144 - val_loss: 0.2808 - val_acc: 0.9110
Learning rate:  1e-06
Epoch 185/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2487 - acc: 0.9136 - val_loss: 0.2791 - val_acc: 0.9110
Learning rate:  1e-06
Epoch 186/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2537 - acc: 0.9126 - val_loss: 0.2799 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 187/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2681 - acc: 0.9084 - val_loss: 0.2798 - val_acc: 0.9100
Learning rate:  1e-06
Epoch 188/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2562 - acc: 0.9164 - val_loss: 0.2805 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 189/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2574 - acc: 0.9118 - val_loss: 0.2784 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 190/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2570 - acc: 0.9071 - val_loss: 0.2792 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 191/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2520 - acc: 0.9199 - val_loss: 0.2805 - val_acc: 0.9110
Learning rate:  5e-07
Epoch 192/200
178/177 [==============================] - 38s 211ms/step - loss: 0.2611 - acc: 0.9086 - val_loss: 0.2792 - val_acc: 0.9100
Learning rate:  5e-07
Epoch 193/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2492 - acc: 0.9136 - val_loss: 0.2808 - val_acc: 0.9119
Learning rate:  5e-07
Epoch 194/200
178/177 [==============================] - 37s 209ms/step - loss: 0.2554 - acc: 0.9120 - val_loss: 0.2784 - val_acc: 0.9119
Learning rate:  5e-07
Epoch 195/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2584 - acc: 0.9121 - val_loss: 0.2781 - val_acc: 0.9110
Learning rate:  5e-07
Epoch 196/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2587 - acc: 0.9109 - val_loss: 0.2779 - val_acc: 0.9091
Learning rate:  5e-07
Epoch 197/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2469 - acc: 0.9183 - val_loss: 0.2801 - val_acc: 0.9119
Learning rate:  5e-07
Epoch 198/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2618 - acc: 0.9082 - val_loss: 0.2781 - val_acc: 0.9110
Learning rate:  5e-07
Epoch 199/200
178/177 [==============================] - 37s 210ms/step - loss: 0.2601 - acc: 0.9086 - val_loss: 0.2787 - val_acc: 0.9119
Learning rate:  5e-07
Epoch 200/200
178/177 [==============================] - 37s 208ms/step - loss: 0.2466 - acc: 0.9147 - val_loss: 0.2784 - val_acc: 0.9119
Out[12]:
<tensorflow.python.keras.callbacks.History at 0x1ea923fe358>
In [13]:
## Now the training is complete, we get
# another object to load the weights
# compile it, so that we can do 
# final evaluation on it
modelGo.load_weights(filepath)
modelGo.compile(loss='categorical_crossentropy', 
                optimizer='adam', 
                metrics=['accuracy'])
In [14]:
# Make classification on the test dataset
predicts    = modelGo.predict(tsDat)

# Prepare the classification output
# for the classification report
predout     = np.argmax(predicts,axis=1)
testout     = np.argmax(tsLbl,axis=1)
labelname   = ['non-flower', 'flower']
                                            # the labels for the classfication report


testScores  = metrics.accuracy_score(testout,predout)
confusion   = metrics.confusion_matrix(testout,predout)


print("Best accuracy (on testing dataset): %.2f%%" % (testScores*100))
print(metrics.classification_report(testout,predout,target_names=labelname,digits=4))
print(confusion)
Best accuracy (on testing dataset): 91.75%
              precision    recall  f1-score   support

  non-flower     0.8996    0.9075    0.9035       454
      flower     0.9310    0.9250    0.9280       613

    accuracy                         0.9175      1067
   macro avg     0.9153    0.9162    0.9157      1067
weighted avg     0.9176    0.9175    0.9176      1067

[[412  42]
 [ 46 567]]
In [15]:
import pandas as pd

records     = pd.read_csv(modelname +'.csv')
plt.figure()
plt.subplot(211)
plt.plot(records['val_loss'])
plt.plot(records['loss'])
plt.yticks([0, 0.20, 0.30, 0.4, 0.5])
plt.title('Loss value',fontsize=12)

ax          = plt.gca()
ax.set_xticklabels([])



plt.subplot(212)
plt.plot(records['val_acc'])
plt.plot(records['acc'])
plt.yticks([0.7, 0.8, 0.9, 1.0])
plt.title('Accuracy',fontsize=12)
plt.show()
In [16]:
wrong_ans_index = []

for i in range(len(predout)):
    if predout[i] != testout[i]:
        wrong_ans_index.append(i)
In [17]:
wrong_ans_index = list(set(wrong_ans_index))
In [18]:
# Randomly show X examples of that was wrong

dataset = tsDatOrg #flowers #fungus #rocks

for index in wrong_ans_index:
    #index = wrong_ans_index[random.randint(0, len(wrong_ans_index)-1)]
    print("Showing %s index image" %(index))
    print("Predicted as %s but is actually %s" %(predout[index], testout[index]))
    imgplot = plt.imshow(data[index])
    plt.show()
Showing 12 index image
Predicted as 0 but is actually 1
Showing 1047 index image
Predicted as 1 but is actually 0
Showing 539 index image
Predicted as 0 but is actually 1
Showing 540 index image
Predicted as 0 but is actually 1
Showing 1054 index image
Predicted as 1 but is actually 0
Showing 1056 index image
Predicted as 1 but is actually 0
Showing 545 index image
Predicted as 0 but is actually 1
Showing 37 index image
Predicted as 0 but is actually 1
Showing 72 index image
Predicted as 0 but is actually 1
Showing 596 index image
Predicted as 0 but is actually 1
Showing 599 index image
Predicted as 0 but is actually 1
Showing 92 index image
Predicted as 0 but is actually 1
Showing 615 index image
Predicted as 1 but is actually 0
Showing 616 index image
Predicted as 1 but is actually 0
Showing 108 index image
Predicted as 0 but is actually 1
Showing 110 index image
Predicted as 0 but is actually 1
Showing 113 index image
Predicted as 0 but is actually 1
Showing 131 index image
Predicted as 0 but is actually 1
Showing 133 index image
Predicted as 0 but is actually 1
Showing 645 index image
Predicted as 1 but is actually 0
Showing 137 index image
Predicted as 0 but is actually 1
Showing 650 index image
Predicted as 1 but is actually 0
Showing 147 index image
Predicted as 0 but is actually 1
Showing 148 index image
Predicted as 0 but is actually 1
Showing 662 index image
Predicted as 1 but is actually 0
Showing 675 index image
Predicted as 1 but is actually 0
Showing 176 index image
Predicted as 0 but is actually 1
Showing 695 index image
Predicted as 1 but is actually 0
Showing 712 index image
Predicted as 1 but is actually 0
Showing 204 index image
Predicted as 0 but is actually 1
Showing 205 index image
Predicted as 0 but is actually 1
Showing 220 index image
Predicted as 0 but is actually 1
Showing 733 index image
Predicted as 1 but is actually 0
Showing 743 index image
Predicted as 1 but is actually 0
Showing 233 index image
Predicted as 0 but is actually 1
Showing 752 index image
Predicted as 1 but is actually 0
Showing 250 index image
Predicted as 0 but is actually 1
Showing 775 index image
Predicted as 1 but is actually 0
Showing 269 index image
Predicted as 0 but is actually 1
Showing 790 index image
Predicted as 1 but is actually 0
Showing 279 index image
Predicted as 0 but is actually 1
Showing 791 index image
Predicted as 1 but is actually 0
Showing 792 index image
Predicted as 1 but is actually 0
Showing 288 index image
Predicted as 0 but is actually 1
Showing 805 index image
Predicted as 1 but is actually 0
Showing 806 index image
Predicted as 1 but is actually 0
Showing 300 index image
Predicted as 0 but is actually 1
Showing 821 index image
Predicted as 1 but is actually 0
Showing 311 index image
Predicted as 0 but is actually 1
Showing 830 index image
Predicted as 1 but is actually 0
Showing 834 index image
Predicted as 1 but is actually 0
Showing 329 index image
Predicted as 0 but is actually 1
Showing 845 index image
Predicted as 1 but is actually 0
Showing 336 index image
Predicted as 0 but is actually 1
Showing 852 index image
Predicted as 1 but is actually 0
Showing 347 index image
Predicted as 0 but is actually 1
Showing 349 index image
Predicted as 0 but is actually 1
Showing 365 index image
Predicted as 0 but is actually 1
Showing 367 index image
Predicted as 0 but is actually 1
Showing 375 index image
Predicted as 0 but is actually 1
Showing 889 index image
Predicted as 1 but is actually 0
Showing 893 index image
Predicted as 1 but is actually 0
Showing 382 index image
Predicted as 0 but is actually 1
Showing 384 index image
Predicted as 0 but is actually 1
Showing 904 index image
Predicted as 1 but is actually 0
Showing 909 index image
Predicted as 1 but is actually 0
Showing 398 index image
Predicted as 0 but is actually 1
Showing 914 index image
Predicted as 1 but is actually 0
Showing 915 index image
Predicted as 1 but is actually 0
Showing 919 index image
Predicted as 1 but is actually 0
Showing 921 index image
Predicted as 1 but is actually 0
Showing 420 index image
Predicted as 0 but is actually 1
Showing 942 index image
Predicted as 1 but is actually 0
Showing 947 index image
Predicted as 1 but is actually 0
Showing 955 index image
Predicted as 1 but is actually 0
Showing 958 index image
Predicted as 1 but is actually 0
Showing 451 index image
Predicted as 0 but is actually 1
Showing 964 index image
Predicted as 1 but is actually 0
Showing 975 index image
Predicted as 1 but is actually 0
Showing 469 index image
Predicted as 0 but is actually 1
Showing 982 index image
Predicted as 1 but is actually 0
Showing 471 index image
Predicted as 0 but is actually 1
Showing 474 index image
Predicted as 0 but is actually 1
Showing 487 index image
Predicted as 0 but is actually 1
Showing 498 index image
Predicted as 0 but is actually 1
Showing 1015 index image
Predicted as 1 but is actually 0
Showing 505 index image
Predicted as 0 but is actually 1
Showing 1022 index image
Predicted as 1 but is actually 0
In [19]:
# Stacking 3 NNs?